{% load static %} Soundclassification-Chaithanya-large-data-final

Sound Classification using temporal augmentation and Mel frequency cepstral coefficients

Looking at the data

Loading Libraries

In [30]:
# System and os level library imports
import time 
import sys
import os

# array handling library imports
import pandas as pd
import numpy as np

# Sound processing library imports
import librosa
import librosa.display as display

# plotting library imports
import matplotlib.pyplot as plt
%matplotlib inline

# play sound in jupyter notebook
import IPython.display as ipd

# save the entities for futur use
import pickle

# Model building related libraries
import sklearn
from sklearn import metrics 
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import LabelEncoder
# Deep learning library import
import keras.backend as K
from keras.utils import np_utils
from keras.models import model_from_json
from keras.utils import to_categorical
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten,BatchNormalization,Convolution2D, MaxPooling2D,Bidirectional, LSTM,SimpleRNN
from keras.callbacks import EarlyStopping, ModelCheckpoint, TensorBoard
from keras.optimizers import Adam

Defining some useful functions

In [31]:
def saveModel(model_obj,model_file_name, weights_file_name = None):
    """Save keras model to disk."""
    if weights_file_name is None:
        weights_file_name = model_file_name
    model_json = model_obj.to_json()
    with open("new_models/{}.json".format(model_file_name), "w") as json_file:
        json_file.write(model_json)
    # serialize weights to HDF5
    model_obj.save_weights("new_models/{}.h5".format(weights_file_name))
    print("Saved model {} to disk".format(model_file_name))
In [32]:
def loadModel(model_file_name, weights_file_name = None):
    """Load keras model from disk."""
    if weights_file_name is None:
        weights_file_name = model_file_name
    # load json and create model
    json_file = open('./new_models/{}.json'.format(model_file_name), 'r')
    loaded_model_json = json_file.read()
    json_file.close()
    loaded_model = model_from_json(loaded_model_json)
    # load weights into new model
    loaded_model.load_weights("./new_models/{}.h5".format(weights_file_name))
    print("Loaded model {} from disk".format(model_file_name))
    return loaded_model
In [33]:
def get_predicted_classes(model_obj, val_data, test_data= None):
    """Function to return predicted classes."""
    val_preds = model_obj.predict_classes(val_data)
    if test_data is not None:
        test_preds = model_obj.predict_classes(test_data)
        return (val_preds, test_preds)
    return val_preds
In [34]:
def get_predicted_class_prob(model_obj, val_data, test_data= None):
    """Function to return predicted class probabilities."""
    val_preds = pd.DataFrame(model_obj.predict_proba(val_data))
    if test_data is not None:
        test_preds = pd.DataFrame(model_obj.predict_proba(test_data))
        return (val_preds, test_preds)
    return val_preds
In [35]:
def print_metrics(model_obj, train_x, train_y, test_x, test_y):
    """Function to print the model metrics."""
    print(model_obj.metrics_names)
    print(model_obj.evaluate(train_x, train_y))
    print(model_obj.evaluate(test_x, test_y))
    train_preds = model_obj.predict_classes(train_x)
    test_preds = model_obj.predict_classes(test_x)
    target_names = lb.classes_
    return (pd.DataFrame(classification_report(y_pred=test_preds,
                          y_true=test_y.argmax(axis=1),
                          target_names = target_names,
                         output_dict = True
                         )))
In [36]:
os.getcwd()
Out[36]:
'/nfsroot/data/home/bglrgpuaccess/BnglrMisc/Mahidhar/Music'
In [37]:
path = "/home/bglrgpuaccess/BnglrMisc/Mahidhar/Music/augmented_genres/"
In [38]:
data_info = {}
for genre in os.listdir(path):
    f =  os.listdir(path+'/'+str(genre))
    data_info[genre]=len(f)
In [39]:
data_info
Out[39]:
{'blues': 701,
 'classical': 701,
 'country': 701,
 'disco': 701,
 'hiphop': 701,
 'jazz': 702,
 'metal': 701,
 'pop': 701,
 'reggae': 701,
 'rock': 701}
In [40]:
ipd.Audio('/home/bglrgpuaccess/BnglrMisc/Mahidhar/Music/augmented_genres/blues/blues.00000.au_original.wav') # load a local WAV file
Out[40]:
In [41]:
d,sr = librosa.load('/home/bglrgpuaccess/BnglrMisc/Mahidhar/Music/augmented_genres/blues/blues.00000.au_original.wav') # load a local WAV file

Visualizing Audio

In [42]:
plt.axis('off')
plt.style.use('ggplot')
plt.style.use(['dark_background'])
plt.figure(figsize=(14, 5),)

librosa.display.waveplot(d, sr=sr,color='r',)
Out[42]:
<matplotlib.collections.PolyCollection at 0x1b60d390>

X - time y - amplitude

Spectogram

A spectrogram is a visual representation of the spectrum of frequencies of sound or other signals as they vary with time.
In [43]:
X = librosa.stft(d)
Xdb = librosa.amplitude_to_db(abs(X))
plt.figure(figsize=(14, 5))
librosa.display.specshow(Xdb, sr=sr, x_axis='time', y_axis='hz')
plt.title("  of the sample song")
plt.colorbar()
plt.show()
In [44]:
librosa.display.specshow(Xdb, sr=sr, x_axis='time', y_axis='log')
plt.title("Log transormed frequency")
plt.colorbar()
plt.show()

Feature extraction

Load Audio files and extract features

Zero Crossing Rate

The zero crossing rate is the rate of sign-changes along a signal, i.e., the rate at which the signal changes from positive to negative or back.

In [45]:
np.sum(librosa.feature.zero_crossing_rate(d))
Out[45]:
109.67919921875

Spectral Centroid

It indicates where the centre of mass for a sound is located and is calculated as the weighted mean of the frequencies present in the sound.

Consider two songs, one from a blues genre and the other belonging to metal. Now as compared to the blues genre song which is the same throughout its length, the metal song has more frequencies towards the end. So spectral centroid for blues song will lie somewhere near the middle of its spectrum while that for a metal song would be towards its end.

In [46]:
# Normalising the spectral centroid for visualisation
def normalize(d, axis=0):
    """This function will scale the features along the time axis by default."""
    return sklearn.preprocessing.minmax_scale(d, axis=axis)
In [47]:
def plot_spectral_centroid(d,genre):
    """Function to plot the spectral centroid."""
    spectral_centroids = librosa.feature.spectral_centroid(d, sr=sr)[0]
    # Computing the time variable for visualization
    frames = range(len(spectral_centroids))
    t = librosa.frames_to_time(frames)
    #Plotting the Spectral Centroid along the waveform
    fig, ax = plt.subplots( nrows=1, ncols=1 )
    librosa.display.waveplot(d, sr=sr, alpha=0.4)
    plt.plot(t, normalize(spectral_centroids), color='r')
    plt.title("Spectral Centroid of a {} song".format(genre))
    plt.axhline(y=0.5, color='black', linestyle='--')
    fig.set_size_inches(9.25, 4.5)
    fig.savefig('img/{}_spectral_centroid.png'.format(genre))
    plt.show()
In [48]:
for k in data_info.keys():
    data_path = '/home/bglrgpuaccess/BnglrMisc/Mahidhar/Music/augmented_genres/{}/{}.00000.au_original.wav'.format(k,k)
    d,sr = librosa.load(data_path)
    plot_spectral_centroid(d,k)

Spectral Rolloff

It is a measure of the shape of the signal. It represents the frequency below which a specified percentage of the total spectral energy, e.g. 85%, lies.

In [49]:
def plot_spectral_rolloff(d,genre):
    """Function to plot the spectral roll off."""
    #Plotting the Spectral Rolloff along the waveform
    fig, ax = plt.subplots( nrows=1, ncols=1 )
    spectral_rolloff = librosa.feature.spectral_rolloff(d+0.01, sr=sr)[0]
    librosa.display.waveplot(d, sr=sr, alpha=0.4)
    frames = range(len(spectral_rolloff))
    t = librosa.frames_to_time(frames)
    plt.plot(t, normalize(spectral_rolloff), color='r')
    plt.title("Spectral Rolloff of a {} song".format(genre))
    plt.axhline(y=0.5, color='black', linestyle='--')
    fig.set_size_inches(9.25, 4.5)
    fig.savefig('img/{}_spectral_rolloff.png'.format(genre))
    plt.show()
In [50]:
for k in data_info.keys():
    data_path = '/home/bglrgpuaccess/BnglrMisc/Mahidhar/Music/augmented_genres/{}/{}.00000.au_original.wav'.format(k,k)
    d,sr = librosa.load(data_path)
    plot_spectral_rolloff(d,k)

Mel-Frequency Cepstral Coefficients

The Mel frequency cepstral coefficients (MFCCs) of a signal are a small set of features (usually about 10–20) which concisely describe the overall shape of a spectral envelope. It models the characteristics of the human voice.

In [51]:
def plot_mfcc(d,genre):
    """Function to plot the mfcc."""
    fig, ax = plt.subplots( nrows=1, ncols=1 )
    mfccs = librosa.feature.mfcc(d)
    librosa.display.specshow(mfccs, sr=sr, x_axis='time')
    fig.savefig('img/{}_mfcc.png'.format(genre))
    plt.show()
    return mfccs
In [52]:
for k in data_info.keys():
    data_path = '/home/bglrgpuaccess/BnglrMisc/Mahidhar/Music/augmented_genres/{}/{}.00000.au_original.wav'.format(k,k)
    d,sr = librosa.load(data_path)
    mfccs = plot_mfcc(d,k)
We can also perform feature scaling such that each coefficient dimension has zero mean and unit variance
In [53]:
mfccs = sklearn.preprocessing.scale(mfccs, axis=1)
plt.figure(figsize=(15, 5))
librosa.display.specshow(mfccs, sr=sr, x_axis='time')
Out[53]:
<matplotlib.axes._subplots.AxesSubplot at 0xcd74a90>

Chroma Frequencies

Chroma features are an interesting and powerful representation for music audio in which the entire spectrum is projected onto 12 bins representing the 12 distinct semitones (or chroma) of the musical octave.

In [54]:
hop_length = 512
chromagram = librosa.feature.chroma_stft(d, sr=sr, hop_length=hop_length)
plt.figure(figsize=(15, 5))
librosa.display.specshow(chromagram, x_axis='time', y_axis='chroma', hop_length=hop_length, cmap='coolwarm')
Out[54]:
<matplotlib.axes._subplots.AxesSubplot at 0xcd51d90>
In [55]:
def extract_features(y,sr):
    """This function extracts the features from songs, 
    We are extracting the Chroma features, spectral centroid, spectral bandwidth, spectral roll off, zero crossing rate and the mfccc's"""
    chroma_freq = librosa.feature.chroma_stft(y=y, sr=sr)
    specral_centroid = librosa.feature.spectral_centroid(y=y, sr=sr)
    spectral_bandwidth = librosa.feature.spectral_bandwidth(y=y, sr=sr)
    spectral_rolloff = librosa.feature.spectral_rolloff(y=y, sr=sr)
    zero_crossing_rate = librosa.feature.zero_crossing_rate(y)
    mfcc = np.mean(librosa.feature.mfcc(y=y, sr=sr,n_mfcc=39).T,axis = 0)
    chroma_freq_arr = np.mean(np.ndarray.flatten(chroma_freq))
    spectral_centroid_arr = np.mean(np.ndarray.flatten(specral_centroid))
    spectral_bandwidth_arr = np.mean(np.ndarray.flatten(spectral_bandwidth))
    spectral_rolloff_arr = np.mean(np.ndarray.flatten(spectral_rolloff))
    zero_crossing_rate_arr = np.mean(np.ndarray.flatten(zero_crossing_rate))
    mfcc_arr = np.ndarray.flatten(mfcc)
    temp = np.array([chroma_freq_arr,spectral_centroid_arr,spectral_bandwidth_arr,spectral_rolloff_arr,zero_crossing_rate_arr])
    all_features =  np.concatenate([temp,mfcc_arr])
    return all_features
In [56]:
def loadAudioDataExtractFeatures(file_ID,genre, file_format = 'wav'):
    """Load song and extract the features using the extract_features function"""
    data_path = '/home/bglrgpuaccess/BnglrMisc/Mahidhar/Music/augmented_genres/{}/{}.wav'.format(genre,file_ID)
    y,sr = librosa.load(data_path)
    features = extract_features(y,sr)
    return features

Extracting the features from the songs

In [57]:
# X = []
# y= []
# for genre in os.listdir(path):
#     print("genre",genre)
#     path_genre = path+'/'+str(genre)
#     for i in os.listdir(path_genre):
#         if i.endswith('wav'): 
#             file_ID = i.strip('.wav') #file format already provided
#             X.append(loadAudioDataExtractFeatures(genre= genre,file_ID=file_ID))
#             y.append(genre)
#     print("genre ended",genre)

Saving the numpy arrays extracted

In [58]:
# np.save('X_train_many_features.npy', np.array(X))
X = np.load('X_train_many_features.npy')
In [59]:
# np.save('y_train_many_features.npy', np.array(y))
In [60]:
y = np.load('y_train_many_features.npy')

label encoder

In [61]:
X = np.array(X)
y = np.array(y)

lb = LabelEncoder()

y = to_categorical(lb.fit_transform(y))
Saving label encoder
In [62]:
pickle.dump(lb, open("label_encoder.pkl","wb"), protocol=pickle.HIGHEST_PROTOCOL)
In [63]:
num_labels = y.shape[1]
In [65]:
def recall(y_true, y_pred):
    """Recall metric.

    Only computes a batch-wise average of recall.

    Computes the recall, a metric for multi-label classification of
    how many relevant items are selected.
    """
    true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    possible_positives = K.sum(K.round(K.clip(y_true, 0, 1)))
    recall = true_positives / (possible_positives + K.epsilon())
    return recall
In [66]:
x_train, x_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 124)

scaling

In [67]:
scaler = StandardScaler()
scaler.fit(np.array(x_train[:, :-1], dtype = float))
x_train= scaler.transform(np.array(x_train[:, :-1], dtype = float))
x_test = scaler.transform(np.array(x_test[:, :-1], dtype = float))
Saving scaler
In [68]:
pickle.dump(scaler, open("scaler.pkl","wb"), protocol=pickle.HIGHEST_PROTOCOL)
In [69]:
# with open('scaler.pkl', 'rb') as pickle_file:
#     tep = pickle.load(pickle_file)
In [70]:
scaler.mean_
Out[70]:
array([ 4.44932447e-01,  1.98883668e+03,  1.86400916e+03,  4.10105598e+03,
        1.22008101e-01, -2.08531678e+02,  1.17085010e+02, -3.49108502e+01,
        5.53811233e+01, -1.81386979e+01,  3.27661516e+01, -1.35187139e+01,
        1.56882896e+01, -2.96360469e+00,  2.36319041e+00,  4.59905077e+00,
       -3.80646642e+00,  7.22543597e+00, -6.18049154e+00,  7.37023859e+00,
       -6.65921546e+00,  5.27115960e+00, -3.48929561e+00,  1.93513867e+00,
       -1.64674625e+00, -9.14070832e-01,  1.34627576e+00, -3.18669983e+00,
        2.79304783e+00, -3.89585254e+00,  2.88975766e+00, -4.06776351e+00,
        1.86836870e+00, -2.20212821e+00,  3.32505737e-01, -4.75414669e-01,
       -1.61384943e+00,  1.03890302e+00, -2.78576010e+00,  1.73495142e+00,
       -2.89650449e+00,  1.68726187e+00, -2.19240207e+00,  4.66908693e-01])

Model Building

Basic Neural Network

In [71]:
model_name = 'Basic_NN'
tb = TensorBoard(log_dir='./log/{}'.format(model_name), histogram_freq=0,  
          write_graph=True, write_images=True)
callbacks = [EarlyStopping(monitor='val_loss', min_delta= 0.005, patience=50),
            EarlyStopping(monitor='val_acc', min_delta= 0.005, patience=50),
            EarlyStopping(monitor='val_recall', min_delta= 0.005, patience=50),tb]

model_basic_nn = Sequential()
input_shape = x_train.shape[1]
num_labels = 10
model_basic_nn.add(Dense(256, input_shape=(input_shape,)))
model_basic_nn.add(Activation('relu'))
model_basic_nn.add(Dense(128))
model_basic_nn.add(Activation('relu'))
model_basic_nn.add(Dense(num_labels))
model_basic_nn.add(Activation('softmax'))

model_basic_nn.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['acc',recall])
In [72]:
model_basic_nn.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_1 (Dense)              (None, 256)               11520     
_________________________________________________________________
activation_1 (Activation)    (None, 256)               0         
_________________________________________________________________
dense_2 (Dense)              (None, 128)               32896     
_________________________________________________________________
activation_2 (Activation)    (None, 128)               0         
_________________________________________________________________
dense_3 (Dense)              (None, 10)                1290      
_________________________________________________________________
activation_3 (Activation)    (None, 10)                0         
=================================================================
Total params: 45,706
Trainable params: 45,706
Non-trainable params: 0
_________________________________________________________________
In [76]:
%time model_basic_nn.fit(x_train, y_train, callbacks = callbacks,epochs=15000, validation_split=0.25)
Train on 4206 samples, validate on 1402 samples
Epoch 1/15000
4206/4206 [==============================] - 1s 142us/step - loss: 0.7368 - acc: 0.7649 - recall: 0.6262 - val_loss: 0.7673 - val_acc: 0.7418 - val_recall: 0.6205
Epoch 2/15000
4206/4206 [==============================] - 1s 127us/step - loss: 0.5551 - acc: 0.8295 - recall: 0.7268 - val_loss: 0.6160 - val_acc: 0.8031 - val_recall: 0.7090
Epoch 3/15000
4206/4206 [==============================] - 1s 131us/step - loss: 0.4317 - acc: 0.8709 - recall: 0.8019 - val_loss: 0.5530 - val_acc: 0.8146 - val_recall: 0.7511
Epoch 4/15000
4206/4206 [==============================] - 1s 129us/step - loss: 0.3283 - acc: 0.9108 - recall: 0.8552 - val_loss: 0.4352 - val_acc: 0.8545 - val_recall: 0.8046
Epoch 5/15000
4206/4206 [==============================] - 1s 123us/step - loss: 0.2578 - acc: 0.9296 - recall: 0.8954 - val_loss: 0.4034 - val_acc: 0.8588 - val_recall: 0.8288
Epoch 6/15000
4206/4206 [==============================] - 0s 111us/step - loss: 0.2041 - acc: 0.9472 - recall: 0.9237 - val_loss: 0.3107 - val_acc: 0.8980 - val_recall: 0.8702
Epoch 7/15000
4206/4206 [==============================] - 1s 128us/step - loss: 0.1530 - acc: 0.9648 - recall: 0.9453 - val_loss: 0.2663 - val_acc: 0.9223 - val_recall: 0.8937
Epoch 8/15000
4206/4206 [==============================] - 1s 134us/step - loss: 0.1204 - acc: 0.9765 - recall: 0.9610 - val_loss: 0.2277 - val_acc: 0.9272 - val_recall: 0.9094
Epoch 9/15000
4206/4206 [==============================] - 1s 122us/step - loss: 0.0954 - acc: 0.9836 - recall: 0.9738 - val_loss: 0.2134 - val_acc: 0.9344 - val_recall: 0.9151
Epoch 10/15000
4206/4206 [==============================] - 1s 132us/step - loss: 0.0728 - acc: 0.9893 - recall: 0.9822 - val_loss: 0.1758 - val_acc: 0.9508 - val_recall: 0.9351
Epoch 11/15000
4206/4206 [==============================] - 0s 107us/step - loss: 0.0617 - acc: 0.9910 - recall: 0.9857 - val_loss: 0.1743 - val_acc: 0.9486 - val_recall: 0.9337
Epoch 12/15000
4206/4206 [==============================] - 0s 106us/step - loss: 0.0547 - acc: 0.9900 - recall: 0.9864 - val_loss: 0.1506 - val_acc: 0.9593 - val_recall: 0.9465
Epoch 13/15000
4206/4206 [==============================] - 0s 114us/step - loss: 0.0403 - acc: 0.9960 - recall: 0.9936 - val_loss: 0.1571 - val_acc: 0.9529 - val_recall: 0.9408
Epoch 14/15000
4206/4206 [==============================] - 1s 131us/step - loss: 0.0457 - acc: 0.9919 - recall: 0.9898 - val_loss: 0.1425 - val_acc: 0.9536 - val_recall: 0.9465
Epoch 15/15000
4206/4206 [==============================] - 1s 131us/step - loss: 0.0270 - acc: 0.9969 - recall: 0.9960 - val_loss: 0.1276 - val_acc: 0.9586 - val_recall: 0.9515
Epoch 16/15000
4206/4206 [==============================] - 1s 128us/step - loss: 0.0199 - acc: 0.9990 - recall: 0.9988 - val_loss: 0.1124 - val_acc: 0.9672 - val_recall: 0.9608
Epoch 17/15000
4206/4206 [==============================] - 1s 135us/step - loss: 0.0178 - acc: 0.9981 - recall: 0.9976 - val_loss: 0.1152 - val_acc: 0.9658 - val_recall: 0.9579
Epoch 18/15000
4206/4206 [==============================] - 0s 107us/step - loss: 0.0159 - acc: 0.9988 - recall: 0.9986 - val_loss: 0.1081 - val_acc: 0.9708 - val_recall: 0.9693
Epoch 19/15000
4206/4206 [==============================] - 1s 131us/step - loss: 0.0148 - acc: 0.9988 - recall: 0.9988 - val_loss: 0.1069 - val_acc: 0.9722 - val_recall: 0.9672
Epoch 20/15000
4206/4206 [==============================] - 0s 108us/step - loss: 0.0147 - acc: 0.9976 - recall: 0.9974 - val_loss: 0.1144 - val_acc: 0.9672 - val_recall: 0.9615
Epoch 21/15000
4206/4206 [==============================] - 0s 115us/step - loss: 0.0106 - acc: 0.9993 - recall: 0.9993 - val_loss: 0.1043 - val_acc: 0.9693 - val_recall: 0.9658
Epoch 22/15000
4206/4206 [==============================] - 1s 130us/step - loss: 0.0092 - acc: 0.9993 - recall: 0.9993 - val_loss: 0.1006 - val_acc: 0.9736 - val_recall: 0.9686
Epoch 23/15000
4206/4206 [==============================] - 1s 125us/step - loss: 0.0146 - acc: 0.9974 - recall: 0.9967 - val_loss: 0.1746 - val_acc: 0.9522 - val_recall: 0.9486
Epoch 24/15000
4206/4206 [==============================] - 1s 128us/step - loss: 0.0468 - acc: 0.9883 - recall: 0.9857 - val_loss: 0.3329 - val_acc: 0.9073 - val_recall: 0.9037
Epoch 25/15000
4206/4206 [==============================] - 0s 117us/step - loss: 0.0805 - acc: 0.9762 - recall: 0.9736 - val_loss: 0.1856 - val_acc: 0.9451 - val_recall: 0.9408
Epoch 26/15000
4206/4206 [==============================] - 1s 130us/step - loss: 0.0474 - acc: 0.9862 - recall: 0.9848 - val_loss: 0.1518 - val_acc: 0.9558 - val_recall: 0.9508
Epoch 27/15000
4206/4206 [==============================] - 1s 132us/step - loss: 0.0165 - acc: 0.9971 - recall: 0.9971 - val_loss: 0.1374 - val_acc: 0.9629 - val_recall: 0.9593
Epoch 28/15000
4206/4206 [==============================] - 1s 120us/step - loss: 0.0267 - acc: 0.9936 - recall: 0.9931 - val_loss: 0.1156 - val_acc: 0.9650 - val_recall: 0.9622
Epoch 29/15000
4206/4206 [==============================] - 1s 125us/step - loss: 0.0074 - acc: 0.9993 - recall: 0.9993 - val_loss: 0.1075 - val_acc: 0.9693 - val_recall: 0.9672
Epoch 30/15000
4206/4206 [==============================] - 0s 117us/step - loss: 0.0050 - acc: 0.9993 - recall: 0.9993 - val_loss: 0.0956 - val_acc: 0.9715 - val_recall: 0.9693
Epoch 31/15000
4206/4206 [==============================] - 1s 131us/step - loss: 0.0147 - acc: 0.9964 - recall: 0.9964 - val_loss: 0.0997 - val_acc: 0.9700 - val_recall: 0.9679
Epoch 32/15000
4206/4206 [==============================] - 1s 131us/step - loss: 0.0063 - acc: 0.9990 - recall: 0.9990 - val_loss: 0.0938 - val_acc: 0.9750 - val_recall: 0.9708
Epoch 33/15000
4206/4206 [==============================] - 1s 126us/step - loss: 0.0109 - acc: 0.9983 - recall: 0.9983 - val_loss: 0.0941 - val_acc: 0.9722 - val_recall: 0.9700
Epoch 34/15000
4206/4206 [==============================] - 1s 132us/step - loss: 0.0042 - acc: 0.9995 - recall: 0.9995 - val_loss: 0.1109 - val_acc: 0.9672 - val_recall: 0.9636
Epoch 35/15000
4206/4206 [==============================] - 0s 114us/step - loss: 0.0051 - acc: 0.9988 - recall: 0.9988 - val_loss: 0.1031 - val_acc: 0.9715 - val_recall: 0.9700
Epoch 36/15000
4206/4206 [==============================] - 0s 105us/step - loss: 0.0045 - acc: 0.9993 - recall: 0.9993 - val_loss: 0.0927 - val_acc: 0.9715 - val_recall: 0.9700
Epoch 37/15000
4206/4206 [==============================] - 0s 112us/step - loss: 0.0142 - acc: 0.9967 - recall: 0.9964 - val_loss: 0.1668 - val_acc: 0.9536 - val_recall: 0.9501
Epoch 38/15000
4206/4206 [==============================] - 1s 120us/step - loss: 0.1072 - acc: 0.9712 - recall: 0.9696 - val_loss: 0.3506 - val_acc: 0.9016 - val_recall: 0.8951
Epoch 39/15000
4206/4206 [==============================] - 0s 116us/step - loss: 0.1079 - acc: 0.9653 - recall: 0.9615 - val_loss: 0.1909 - val_acc: 0.9444 - val_recall: 0.9358
Epoch 40/15000
4206/4206 [==============================] - 0s 113us/step - loss: 0.0145 - acc: 0.9969 - recall: 0.9967 - val_loss: 0.1125 - val_acc: 0.9650 - val_recall: 0.9622
Epoch 41/15000
4206/4206 [==============================] - 1s 123us/step - loss: 0.0079 - acc: 0.9979 - recall: 0.9979 - val_loss: 0.0989 - val_acc: 0.9757 - val_recall: 0.9757
Epoch 42/15000
4206/4206 [==============================] - 1s 133us/step - loss: 0.0051 - acc: 0.9990 - recall: 0.9990 - val_loss: 0.1088 - val_acc: 0.9743 - val_recall: 0.9743
Epoch 43/15000
4206/4206 [==============================] - 1s 122us/step - loss: 0.0034 - acc: 0.9993 - recall: 0.9993 - val_loss: 0.0976 - val_acc: 0.9765 - val_recall: 0.9757
Epoch 44/15000
4206/4206 [==============================] - 1s 138us/step - loss: 0.0036 - acc: 0.9993 - recall: 0.9993 - val_loss: 0.0881 - val_acc: 0.9772 - val_recall: 0.9772
Epoch 45/15000
4206/4206 [==============================] - 1s 127us/step - loss: 0.0033 - acc: 0.9990 - recall: 0.9990 - val_loss: 0.0845 - val_acc: 0.9765 - val_recall: 0.9765
Epoch 46/15000
4206/4206 [==============================] - 1s 130us/step - loss: 0.0041 - acc: 0.9990 - recall: 0.9990 - val_loss: 0.0882 - val_acc: 0.9772 - val_recall: 0.9757
Epoch 47/15000
4206/4206 [==============================] - 1s 138us/step - loss: 0.0028 - acc: 0.9990 - recall: 0.9990 - val_loss: 0.0839 - val_acc: 0.9779 - val_recall: 0.9765
Epoch 48/15000
4206/4206 [==============================] - 1s 123us/step - loss: 0.0024 - acc: 0.9995 - recall: 0.9995 - val_loss: 0.0857 - val_acc: 0.9772 - val_recall: 0.9765
Epoch 49/15000
4206/4206 [==============================] - 0s 116us/step - loss: 0.0042 - acc: 0.9990 - recall: 0.9990 - val_loss: 0.0852 - val_acc: 0.9800 - val_recall: 0.9793
Epoch 50/15000
4206/4206 [==============================] - 0s 105us/step - loss: 0.0032 - acc: 0.9993 - recall: 0.9993 - val_loss: 0.0898 - val_acc: 0.9786 - val_recall: 0.9779
Epoch 51/15000
4206/4206 [==============================] - 0s 109us/step - loss: 0.0034 - acc: 0.9990 - recall: 0.9990 - val_loss: 0.0965 - val_acc: 0.9772 - val_recall: 0.9765
CPU times: user 45 s, sys: 5.88 s, total: 50.9 s
Wall time: 26.8 s
Out[76]:
<keras.callbacks.History at 0xd158810>
In [77]:
model_basic_nn.metrics_names
Out[77]:
['loss', 'acc', 'recall']
In [78]:
model_basic_nn.evaluate(x_train, y_train)
5608/5608 [==============================] - 0s 41us/step
Out[78]:
[0.02771695297084863, 0.9934022824536377, 0.9932239657631954]
In [79]:
train_preds_basic_nn = model_basic_nn.predict_classes(x_train)
test_preds_basic_nn = model_basic_nn.predict_classes(x_test)
In [80]:
train_preds_basic_nn_proba,test_preds_basic_nn_proba = get_predicted_class_prob(model_basic_nn,x_train, x_test)
train_preds_basic_nn,test_preds_basic_nn = get_predicted_classes(model_basic_nn,x_train, x_test)
In [81]:
saveModel(model_basic_nn,"model_basic_nn")
Saved model model_basic_nn to disk
In [82]:
model_basic_nn = loadModel('model_basic_nn')
Loaded model model_basic_nn from disk
In [83]:
model_basic_nn.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['acc',recall])
In [84]:
x_train[0]
Out[84]:
array([-1.98896239, -1.27421061, -0.55899042, -1.20583012, -1.17169541,
        0.01550273,  0.72142471,  0.79821175,  0.4354028 ,  0.74100518,
       -1.25599166, -0.71875966, -0.09077415, -1.87667268, -0.63167897,
       -1.0948513 , -1.78806273, -0.78063185, -0.37952927, -0.48799131,
       -0.86232644, -0.15337243, -1.0548783 , -0.00225718, -0.57859402,
       -1.41891559, -0.62721687, -0.52262825, -0.02158828, -0.0369075 ,
        0.94060381, -0.11293259, -0.21549473, -0.25227307, -0.19368065,
       -0.15143519, -0.6603911 , -0.21951001, -0.17598373,  0.82966974,
        0.92106679,  1.53669327,  1.07747686,  0.32755302])
In [87]:
model_basic_nn_df = print_metrics(model_basic_nn,x_train,y_train, x_test, y_test)
['loss', 'acc', 'recall']
5608/5608 [==============================] - 0s 65us/step
[0.02771695297084863, 0.9934022824536377, 0.9932239657631954]
1402/1402 [==============================] - 0s 47us/step
[0.09540914482743972, 0.9764621968616263, 0.9750356633380884]
In [88]:
model_basic_nn_df
Out[88]:
blues classical country disco hiphop jazz macro avg metal micro avg pop reggae rock weighted avg
f1-score 0.996610 0.992908 0.979253 0.972549 0.959707 0.985816 0.976462 0.961039 0.976462 0.989399 0.986577 0.940767 0.976551
precision 0.993243 0.992908 0.975207 0.976378 0.929078 0.985816 0.976424 1.000000 0.976462 1.000000 0.993243 0.918367 0.977321
recall 1.000000 0.992908 0.983333 0.968750 0.992424 0.985816 0.977154 0.925000 0.976462 0.979021 0.980000 0.964286 0.976462
support 147.000000 141.000000 120.000000 128.000000 132.000000 141.000000 1402.000000 160.000000 1402.000000 143.000000 150.000000 140.000000 1402.000000
In [92]:
model_basic_nn_df.to_csv("model_basic_nn_df.csv",index=True)

RNN

Reshaping the arrays for the pesky RNN.
In [93]:
x_rnn_train  = x_train.reshape(x_train.shape[0], 1, x_train.shape[1])
x_rnn_test  = x_test.reshape(x_test.shape[0], 1, x_test.shape[1])
In [94]:
x_rnn_train.shape
Out[94]:
(5608, 1, 44)
In [95]:
model_name = 'Basic_RNN'
tb = TensorBoard(log_dir='./log/{}'.format(model_name), histogram_freq=0,  
          write_graph=True, write_images=True)
callbacks = [EarlyStopping(monitor='val_loss', min_delta= 0.005, patience=20),
            EarlyStopping(monitor='val_acc', min_delta= 0.005, patience=20),
            EarlyStopping(monitor='val_recall', min_delta= 0.005, patience=20),tb]

model_basic_rnn = Sequential()
model_basic_rnn.add(SimpleRNN(256 , input_shape=(x_rnn_train.shape[1], x_rnn_train.shape[2])))
model_basic_rnn.add(Dense(256))
model_basic_rnn.add(Activation('relu'))
model_basic_rnn.add(Dropout(0.5))

model_basic_rnn.add(Dense(256))
model_basic_rnn.add(Activation('relu'))
model_basic_rnn.add(Dropout(0.5))

model_basic_rnn.add(Dense(num_labels))
model_basic_rnn.add(Activation('softmax'))

model_basic_rnn.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['acc',recall])
In [96]:
model_basic_rnn.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
simple_rnn_1 (SimpleRNN)     (None, 256)               77056     
_________________________________________________________________
dense_4 (Dense)              (None, 256)               65792     
_________________________________________________________________
activation_4 (Activation)    (None, 256)               0         
_________________________________________________________________
dropout_1 (Dropout)          (None, 256)               0         
_________________________________________________________________
dense_5 (Dense)              (None, 256)               65792     
_________________________________________________________________
activation_5 (Activation)    (None, 256)               0         
_________________________________________________________________
dropout_2 (Dropout)          (None, 256)               0         
_________________________________________________________________
dense_6 (Dense)              (None, 10)                2570      
_________________________________________________________________
activation_6 (Activation)    (None, 10)                0         
=================================================================
Total params: 211,210
Trainable params: 211,210
Non-trainable params: 0
_________________________________________________________________
In [104]:
%time model_basic_rnn.fit(x_rnn_train, y_train, callbacks = callbacks,epochs=15000, validation_split=0.25)
Train on 4206 samples, validate on 1402 samples
Epoch 1/15000
4206/4206 [==============================] - 1s 204us/step - loss: 0.2567 - acc: 0.9144 - recall: 0.8966 - val_loss: 0.1633 - val_acc: 0.9529 - val_recall: 0.9315
Epoch 2/15000
4206/4206 [==============================] - 1s 210us/step - loss: 0.2528 - acc: 0.9182 - recall: 0.8987 - val_loss: 0.1388 - val_acc: 0.9551 - val_recall: 0.9408
Epoch 3/15000
4206/4206 [==============================] - 1s 167us/step - loss: 0.2060 - acc: 0.9320 - recall: 0.9199 - val_loss: 0.1308 - val_acc: 0.9579 - val_recall: 0.9451
Epoch 4/15000
4206/4206 [==============================] - 1s 189us/step - loss: 0.2208 - acc: 0.9289 - recall: 0.9146 - val_loss: 0.1287 - val_acc: 0.9593 - val_recall: 0.9494
Epoch 5/15000
4206/4206 [==============================] - 1s 201us/step - loss: 0.2220 - acc: 0.9251 - recall: 0.9123 - val_loss: 0.1097 - val_acc: 0.9643 - val_recall: 0.9515
Epoch 6/15000
4206/4206 [==============================] - 1s 205us/step - loss: 0.2043 - acc: 0.9346 - recall: 0.9201 - val_loss: 0.1144 - val_acc: 0.9622 - val_recall: 0.9479
Epoch 7/15000
4206/4206 [==============================] - 1s 173us/step - loss: 0.1955 - acc: 0.9337 - recall: 0.9165 - val_loss: 0.1086 - val_acc: 0.9686 - val_recall: 0.9622
Epoch 8/15000
4206/4206 [==============================] - 1s 174us/step - loss: 0.1906 - acc: 0.9339 - recall: 0.9227 - val_loss: 0.1013 - val_acc: 0.9672 - val_recall: 0.9593
Epoch 9/15000
4206/4206 [==============================] - 1s 172us/step - loss: 0.1758 - acc: 0.9425 - recall: 0.9325 - val_loss: 0.0977 - val_acc: 0.9715 - val_recall: 0.9636
Epoch 10/15000
4206/4206 [==============================] - 1s 173us/step - loss: 0.1659 - acc: 0.9420 - recall: 0.9311 - val_loss: 0.0960 - val_acc: 0.9715 - val_recall: 0.9636
Epoch 11/15000
4206/4206 [==============================] - 1s 164us/step - loss: 0.1835 - acc: 0.9372 - recall: 0.9268 - val_loss: 0.1078 - val_acc: 0.9672 - val_recall: 0.9658
Epoch 12/15000
4206/4206 [==============================] - 1s 181us/step - loss: 0.1601 - acc: 0.9498 - recall: 0.9408 - val_loss: 0.0721 - val_acc: 0.9807 - val_recall: 0.9743
Epoch 13/15000
4206/4206 [==============================] - 1s 205us/step - loss: 0.1800 - acc: 0.9363 - recall: 0.9234 - val_loss: 0.0792 - val_acc: 0.9743 - val_recall: 0.9686
Epoch 14/15000
4206/4206 [==============================] - 1s 215us/step - loss: 0.1694 - acc: 0.9429 - recall: 0.9351 - val_loss: 0.0863 - val_acc: 0.9708 - val_recall: 0.9658
Epoch 15/15000
4206/4206 [==============================] - 1s 178us/step - loss: 0.1628 - acc: 0.9460 - recall: 0.9370 - val_loss: 0.0847 - val_acc: 0.9708 - val_recall: 0.9672
Epoch 16/15000
4206/4206 [==============================] - 1s 205us/step - loss: 0.1585 - acc: 0.9510 - recall: 0.9403 - val_loss: 0.0796 - val_acc: 0.9772 - val_recall: 0.9722
Epoch 17/15000
4206/4206 [==============================] - 1s 181us/step - loss: 0.1617 - acc: 0.9489 - recall: 0.9408 - val_loss: 0.0780 - val_acc: 0.9743 - val_recall: 0.9700
Epoch 18/15000
4206/4206 [==============================] - 1s 169us/step - loss: 0.1437 - acc: 0.9532 - recall: 0.9472 - val_loss: 0.0677 - val_acc: 0.9786 - val_recall: 0.9765
Epoch 19/15000
4206/4206 [==============================] - 1s 158us/step - loss: 0.1455 - acc: 0.9498 - recall: 0.9417 - val_loss: 0.0844 - val_acc: 0.9772 - val_recall: 0.9722
Epoch 20/15000
4206/4206 [==============================] - 1s 155us/step - loss: 0.1474 - acc: 0.9536 - recall: 0.9458 - val_loss: 0.0693 - val_acc: 0.9757 - val_recall: 0.9736
Epoch 21/15000
4206/4206 [==============================] - 1s 150us/step - loss: 0.1373 - acc: 0.9534 - recall: 0.9486 - val_loss: 0.0543 - val_acc: 0.9836 - val_recall: 0.9829
CPU times: user 28.2 s, sys: 2.81 s, total: 31 s
Wall time: 16.5 s
Out[104]:
<keras.callbacks.History at 0x25d3b7d0>
In [98]:
train_preds_basic_rnn_proba,test_preds_basic_rnn_proba = get_predicted_class_prob(model_basic_rnn,x_rnn_train, x_rnn_test)
train_preds_basic_rnn,test_preds_basic_rnn = get_predicted_classes(model_basic_rnn,x_rnn_train, x_rnn_test)
In [99]:
model_basic_rnn_df = print_metrics(model_basic_rnn,x_rnn_train,y_train,x_rnn_test,y_test )
['loss', 'acc', 'recall']
5608/5608 [==============================] - 0s 48us/step
[0.09083303886754027, 0.9777104136947218, 0.9673680456490727]
1402/1402 [==============================] - 0s 41us/step
[0.13658919252891003, 0.9657631954350927, 0.949358059914408]
In [100]:
model_basic_rnn_df
Out[100]:
blues classical country disco hiphop jazz macro avg metal micro avg pop reggae rock weighted avg
f1-score 0.986301 0.975439 0.953975 0.957198 0.947368 0.975610 0.965139 0.984326 0.965763 0.975265 0.945205 0.950704 0.965742
precision 0.993103 0.965278 0.957983 0.953488 0.940299 0.958904 0.965152 0.987421 0.965763 0.985714 0.971831 0.937500 0.966041
recall 0.979592 0.985816 0.950000 0.960938 0.954545 0.992908 0.965437 0.981250 0.965763 0.965035 0.920000 0.964286 0.965763
support 147.000000 141.000000 120.000000 128.000000 132.000000 141.000000 1402.000000 160.000000 1402.000000 143.000000 150.000000 140.000000 1402.000000
In [101]:
saveModel(model_basic_rnn,"model_basic_RNN")
Saved model model_basic_RNN to disk
In [105]:
model_basic_rnn_df.to_csv("model_basic_rnn_df.csv")

LSTM

In [106]:
model_name = 'Basic_LSTM'
tb = TensorBoard(log_dir='./log/{}'.format(model_name), histogram_freq=0,  
          write_graph=True, write_images=True)
callbacks = [EarlyStopping(monitor='val_loss', min_delta= 0.005, patience=20),
            EarlyStopping(monitor='val_acc', min_delta= 0.005, patience=20),
            EarlyStopping(monitor='val_recall', min_delta= 0.005, patience=20),tb]



model_lstm = Sequential()
model_lstm.add(LSTM(256 , input_shape=(x_rnn_train.shape[1], x_rnn_train.shape[2])))
model_lstm.add(BatchNormalization())
model_lstm.add(Dense(256))
model_lstm.add(Activation('relu'))
model_lstm.add(Dropout(0.5))

model_lstm.add(Dense(256))
model_lstm.add(Activation('relu'))
model_lstm.add(Dropout(0.5))

model_lstm.add(Dense(256))
model_lstm.add(Activation('relu'))
model_lstm.add(Dropout(0.5))

model_lstm.add(Dense(num_labels))
model_lstm.add(Activation('softmax'))

model_lstm.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['acc',recall])
In [107]:
model_lstm.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_1 (LSTM)                (None, 256)               308224    
_________________________________________________________________
batch_normalization_1 (Batch (None, 256)               1024      
_________________________________________________________________
dense_7 (Dense)              (None, 256)               65792     
_________________________________________________________________
activation_7 (Activation)    (None, 256)               0         
_________________________________________________________________
dropout_3 (Dropout)          (None, 256)               0         
_________________________________________________________________
dense_8 (Dense)              (None, 256)               65792     
_________________________________________________________________
activation_8 (Activation)    (None, 256)               0         
_________________________________________________________________
dropout_4 (Dropout)          (None, 256)               0         
_________________________________________________________________
dense_9 (Dense)              (None, 256)               65792     
_________________________________________________________________
activation_9 (Activation)    (None, 256)               0         
_________________________________________________________________
dropout_5 (Dropout)          (None, 256)               0         
_________________________________________________________________
dense_10 (Dense)             (None, 10)                2570      
_________________________________________________________________
activation_10 (Activation)   (None, 10)                0         
=================================================================
Total params: 509,194
Trainable params: 508,682
Non-trainable params: 512
_________________________________________________________________
In [109]:
%time model_lstm.fit(x_rnn_train, y_train, callbacks = callbacks,epochs=15000, validation_split=0.25)
Train on 4206 samples, validate on 1402 samples
Epoch 1/15000
4206/4206 [==============================] - 2s 567us/step - loss: 2.0500 - acc: 0.2746 - recall: 0.0956 - val_loss: 1.4518 - val_acc: 0.4608 - val_recall: 0.2504
Epoch 2/15000
4206/4206 [==============================] - 1s 312us/step - loss: 1.5256 - acc: 0.4517 - recall: 0.2727 - val_loss: 1.2215 - val_acc: 0.5920 - val_recall: 0.3445
Epoch 3/15000
4206/4206 [==============================] - 1s 294us/step - loss: 1.3138 - acc: 0.5380 - recall: 0.3678 - val_loss: 1.0304 - val_acc: 0.6555 - val_recall: 0.4551
Epoch 4/15000
4206/4206 [==============================] - 1s 333us/step - loss: 1.1863 - acc: 0.5858 - recall: 0.4346 - val_loss: 0.9219 - val_acc: 0.6954 - val_recall: 0.4964
Epoch 5/15000
4206/4206 [==============================] - 1s 316us/step - loss: 1.0882 - acc: 0.6205 - recall: 0.4838 - val_loss: 0.8504 - val_acc: 0.7218 - val_recall: 0.5606
Epoch 6/15000
4206/4206 [==============================] - 1s 298us/step - loss: 0.9753 - acc: 0.6655 - recall: 0.5414 - val_loss: 0.7446 - val_acc: 0.7568 - val_recall: 0.6170
Epoch 7/15000
4206/4206 [==============================] - 1s 308us/step - loss: 0.9220 - acc: 0.6919 - recall: 0.5775 - val_loss: 0.7074 - val_acc: 0.7703 - val_recall: 0.6277
Epoch 8/15000
4206/4206 [==============================] - 1s 346us/step - loss: 0.8469 - acc: 0.7080 - recall: 0.6013 - val_loss: 0.6278 - val_acc: 0.8017 - val_recall: 0.6783
Epoch 9/15000
4206/4206 [==============================] - 1s 352us/step - loss: 0.7773 - acc: 0.7485 - recall: 0.6493 - val_loss: 0.5945 - val_acc: 0.8088 - val_recall: 0.6912
Epoch 10/15000
4206/4206 [==============================] - 2s 374us/step - loss: 0.7498 - acc: 0.7515 - recall: 0.6676 - val_loss: 0.5230 - val_acc: 0.8338 - val_recall: 0.7361
Epoch 11/15000
4206/4206 [==============================] - 1s 326us/step - loss: 0.6934 - acc: 0.7775 - recall: 0.6914 - val_loss: 0.5222 - val_acc: 0.8431 - val_recall: 0.7475
Epoch 12/15000
4206/4206 [==============================] - 1s 298us/step - loss: 0.6531 - acc: 0.7863 - recall: 0.7126 - val_loss: 0.5093 - val_acc: 0.8459 - val_recall: 0.7432
Epoch 13/15000
4206/4206 [==============================] - 1s 332us/step - loss: 0.6010 - acc: 0.8067 - recall: 0.7316 - val_loss: 0.4237 - val_acc: 0.8680 - val_recall: 0.7989
Epoch 14/15000
4206/4206 [==============================] - 1s 330us/step - loss: 0.5490 - acc: 0.8272 - recall: 0.7680 - val_loss: 0.4078 - val_acc: 0.8652 - val_recall: 0.8103
Epoch 15/15000
4206/4206 [==============================] - 1s 338us/step - loss: 0.5152 - acc: 0.8317 - recall: 0.7782 - val_loss: 0.3779 - val_acc: 0.8730 - val_recall: 0.8203
Epoch 16/15000
4206/4206 [==============================] - 2s 367us/step - loss: 0.5040 - acc: 0.8395 - recall: 0.7882 - val_loss: 0.3253 - val_acc: 0.8944 - val_recall: 0.8417
Epoch 17/15000
4206/4206 [==============================] - 2s 375us/step - loss: 0.4390 - acc: 0.8500 - recall: 0.8088 - val_loss: 0.3611 - val_acc: 0.8795 - val_recall: 0.8352
Epoch 18/15000
4206/4206 [==============================] - 2s 359us/step - loss: 0.4293 - acc: 0.8676 - recall: 0.8243 - val_loss: 0.2791 - val_acc: 0.9130 - val_recall: 0.8645
Epoch 19/15000
4206/4206 [==============================] - 1s 352us/step - loss: 0.4133 - acc: 0.8638 - recall: 0.8286 - val_loss: 0.2789 - val_acc: 0.9237 - val_recall: 0.8631
Epoch 20/15000
4206/4206 [==============================] - 1s 348us/step - loss: 0.3617 - acc: 0.8825 - recall: 0.8497 - val_loss: 0.2612 - val_acc: 0.9116 - val_recall: 0.8816
Epoch 21/15000
4206/4206 [==============================] - 2s 367us/step - loss: 0.3718 - acc: 0.8785 - recall: 0.8447 - val_loss: 0.2308 - val_acc: 0.9351 - val_recall: 0.8923
CPU times: user 50.4 s, sys: 5.08 s, total: 55.4 s
Wall time: 31.3 s
Out[109]:
<keras.callbacks.History at 0x25037510>
In [110]:
train_preds_basic_lstm_proba,test_preds_basic_lstm_proba = get_predicted_class_prob(model_lstm,x_rnn_train, x_rnn_test)
train_preds_basic_lstm,test_preds_basic_lstm = get_predicted_classes(model_lstm,x_rnn_train, x_rnn_test)
In [111]:
model_lstm_df = print_metrics(model_lstm,x_rnn_train,y_train, x_rnn_test, y_test)
['loss', 'acc', 'recall']
5608/5608 [==============================] - 0s 86us/step
[0.14130648014038674, 0.9645149786019972, 0.9374108416547788]
1402/1402 [==============================] - 0s 93us/step
[0.22582029385462976, 0.9308131235982484, 0.898002852556879]
In [112]:
model_lstm_df
Out[112]:
blues classical country disco hiphop jazz macro avg metal micro avg pop reggae rock weighted avg
f1-score 0.943144 0.975779 0.9 0.900000 0.906475 0.93617 0.929440 0.965300 0.930813 0.957143 0.903915 0.906475 0.930738
precision 0.927632 0.952703 0.9 0.886364 0.863014 0.93617 0.930102 0.974522 0.930813 0.978102 0.969466 0.913043 0.932313
recall 0.959184 1.000000 0.9 0.914062 0.954545 0.93617 0.930394 0.956250 0.930813 0.937063 0.846667 0.900000 0.930813
support 147.000000 141.000000 120.0 128.000000 132.000000 141.00000 1402.000000 160.000000 1402.000000 143.000000 150.000000 140.000000 1402.000000
In [113]:
saveModel(model_lstm, 'Basic_LSTM')
Saved model Basic_LSTM to disk
In [114]:
model_lstm_df.to_csv("model_lstm_df.csv")

Bidirectional_LSTM

In [115]:
model_name = 'Bidirectional_LSTM'
tb = TensorBoard(log_dir='./log/{}'.format(model_name), histogram_freq=0,  
          write_graph=True, write_images=True)
callbacks = [EarlyStopping(monitor='val_loss', min_delta= 0.005, patience=20),
            EarlyStopping(monitor='val_acc', min_delta= 0.005, patience=20),
            EarlyStopping(monitor='val_recall', min_delta= 0.005, patience=20),tb]



model_bidi_lstm = Sequential()
model_bidi_lstm.add(Bidirectional(LSTM(256 ,return_sequences=True),
                        input_shape=(x_rnn_train.shape[1], x_rnn_train.shape[2])))

model_bidi_lstm.add(Bidirectional(LSTM(100)))


model_bidi_lstm.add(Dense(256))
model_bidi_lstm.add(Activation('relu'))
model_bidi_lstm.add(Dropout(0.5))

model_bidi_lstm.add(Dense(256))
model_bidi_lstm.add(Activation('relu'))
model_bidi_lstm.add(Dropout(0.5))

model_bidi_lstm.add(Dense(num_labels))
model_bidi_lstm.add(Activation('softmax'))

model_bidi_lstm.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['acc',recall])
In [116]:
model_bidi_lstm.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
bidirectional_1 (Bidirection (None, 1, 512)            616448    
_________________________________________________________________
bidirectional_2 (Bidirection (None, 200)               490400    
_________________________________________________________________
dense_11 (Dense)             (None, 256)               51456     
_________________________________________________________________
activation_11 (Activation)   (None, 256)               0         
_________________________________________________________________
dropout_6 (Dropout)          (None, 256)               0         
_________________________________________________________________
dense_12 (Dense)             (None, 256)               65792     
_________________________________________________________________
activation_12 (Activation)   (None, 256)               0         
_________________________________________________________________
dropout_7 (Dropout)          (None, 256)               0         
_________________________________________________________________
dense_13 (Dense)             (None, 10)                2570      
_________________________________________________________________
activation_13 (Activation)   (None, 10)                0         
=================================================================
Total params: 1,226,666
Trainable params: 1,226,666
Non-trainable params: 0
_________________________________________________________________
In [117]:
%time model_bidi_lstm.fit(x_rnn_train, y_train, callbacks = callbacks,epochs=15000, validation_split=0.25,batch_size=30)
Train on 4206 samples, validate on 1402 samples
Epoch 1/15000
4206/4206 [==============================] - 6s 1ms/step - loss: 1.6759 - acc: 0.3890 - recall: 0.1781 - val_loss: 1.2682 - val_acc: 0.5328 - val_recall: 0.3723
Epoch 2/15000
4206/4206 [==============================] - 3s 819us/step - loss: 1.1079 - acc: 0.6084 - recall: 0.4339 - val_loss: 0.9427 - val_acc: 0.6748 - val_recall: 0.5278
Epoch 3/15000
4206/4206 [==============================] - 3s 767us/step - loss: 0.8928 - acc: 0.6981 - recall: 0.5728 - val_loss: 0.8777 - val_acc: 0.6854 - val_recall: 0.6098
Epoch 4/15000
4206/4206 [==============================] - 3s 790us/step - loss: 0.6988 - acc: 0.7660 - recall: 0.6781 - val_loss: 0.7036 - val_acc: 0.7696 - val_recall: 0.7090
Epoch 5/15000
4206/4206 [==============================] - 3s 791us/step - loss: 0.5747 - acc: 0.8058 - recall: 0.7380 - val_loss: 0.6327 - val_acc: 0.8081 - val_recall: 0.7561
Epoch 6/15000
4206/4206 [==============================] - 3s 794us/step - loss: 0.4807 - acc: 0.8436 - recall: 0.7979 - val_loss: 0.4838 - val_acc: 0.8495 - val_recall: 0.8088
Epoch 7/15000
4206/4206 [==============================] - 3s 761us/step - loss: 0.3994 - acc: 0.8659 - recall: 0.8293 - val_loss: 0.4616 - val_acc: 0.8431 - val_recall: 0.8088
Epoch 8/15000
4206/4206 [==============================] - 3s 776us/step - loss: 0.2918 - acc: 0.9013 - recall: 0.8780 - val_loss: 0.3611 - val_acc: 0.8830 - val_recall: 0.8616
Epoch 9/15000
4206/4206 [==============================] - 3s 741us/step - loss: 0.2385 - acc: 0.9213 - recall: 0.8999 - val_loss: 0.3090 - val_acc: 0.8944 - val_recall: 0.8795
Epoch 10/15000
4206/4206 [==============================] - 3s 793us/step - loss: 0.2133 - acc: 0.9303 - recall: 0.9199 - val_loss: 0.3400 - val_acc: 0.8994 - val_recall: 0.8837
Epoch 11/15000
4206/4206 [==============================] - 3s 775us/step - loss: 0.1882 - acc: 0.9368 - recall: 0.9258 - val_loss: 0.2200 - val_acc: 0.9387 - val_recall: 0.9244
Epoch 12/15000
4206/4206 [==============================] - 3s 735us/step - loss: 0.1546 - acc: 0.9529 - recall: 0.9437 - val_loss: 0.2113 - val_acc: 0.9315 - val_recall: 0.9180
Epoch 13/15000
4206/4206 [==============================] - 3s 764us/step - loss: 0.1126 - acc: 0.9660 - recall: 0.9591 - val_loss: 0.1944 - val_acc: 0.9465 - val_recall: 0.9422
Epoch 14/15000
4206/4206 [==============================] - 3s 813us/step - loss: 0.0918 - acc: 0.9691 - recall: 0.9631 - val_loss: 0.1882 - val_acc: 0.9415 - val_recall: 0.9351
Epoch 15/15000
4206/4206 [==============================] - 3s 776us/step - loss: 0.0641 - acc: 0.9831 - recall: 0.9803 - val_loss: 0.1810 - val_acc: 0.9494 - val_recall: 0.9465
Epoch 16/15000
4206/4206 [==============================] - 3s 746us/step - loss: 0.0749 - acc: 0.9750 - recall: 0.9715 - val_loss: 0.2843 - val_acc: 0.9215 - val_recall: 0.9165
Epoch 17/15000
4206/4206 [==============================] - 3s 798us/step - loss: 0.0981 - acc: 0.9722 - recall: 0.9681 - val_loss: 0.2001 - val_acc: 0.9465 - val_recall: 0.9437
Epoch 18/15000
4206/4206 [==============================] - 3s 768us/step - loss: 0.0756 - acc: 0.9769 - recall: 0.9736 - val_loss: 0.1685 - val_acc: 0.9486 - val_recall: 0.9444
Epoch 19/15000
4206/4206 [==============================] - 3s 758us/step - loss: 0.0670 - acc: 0.9798 - recall: 0.9774 - val_loss: 0.1427 - val_acc: 0.9622 - val_recall: 0.9593
Epoch 20/15000
4206/4206 [==============================] - 3s 774us/step - loss: 0.0316 - acc: 0.9910 - recall: 0.9898 - val_loss: 0.1662 - val_acc: 0.9565 - val_recall: 0.9508
Epoch 21/15000
4206/4206 [==============================] - 3s 737us/step - loss: 0.0602 - acc: 0.9812 - recall: 0.9793 - val_loss: 0.1752 - val_acc: 0.9544 - val_recall: 0.9536
CPU times: user 1min 59s, sys: 12.2 s, total: 2min 11s
Wall time: 1min 16s
Out[117]:
<keras.callbacks.History at 0x48214f90>
In [118]:
model_bidi_lstm.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
bidirectional_1 (Bidirection (None, 1, 512)            616448    
_________________________________________________________________
bidirectional_2 (Bidirection (None, 200)               490400    
_________________________________________________________________
dense_11 (Dense)             (None, 256)               51456     
_________________________________________________________________
activation_11 (Activation)   (None, 256)               0         
_________________________________________________________________
dropout_6 (Dropout)          (None, 256)               0         
_________________________________________________________________
dense_12 (Dense)             (None, 256)               65792     
_________________________________________________________________
activation_12 (Activation)   (None, 256)               0         
_________________________________________________________________
dropout_7 (Dropout)          (None, 256)               0         
_________________________________________________________________
dense_13 (Dense)             (None, 10)                2570      
_________________________________________________________________
activation_13 (Activation)   (None, 10)                0         
=================================================================
Total params: 1,226,666
Trainable params: 1,226,666
Non-trainable params: 0
_________________________________________________________________
In [119]:
train_preds_basic_bidi_lstm_proba,test_preds_basic_bidi_lstm_proba = get_predicted_class_prob(model_bidi_lstm,x_rnn_train, x_rnn_test)
train_preds_basic_bidi_lstm,test_preds_basic_bidi_lstm = get_predicted_classes(model_bidi_lstm,x_rnn_train, x_rnn_test)
In [120]:
model_bidi_lstm_df = print_metrics(model_bidi_lstm,x_rnn_train,y_train,x_rnn_test,y_test )
['loss', 'acc', 'recall']
5608/5608 [==============================] - 1s 182us/step
[0.060588648395955604, 0.9827032810271041, 0.9818116975748931]
1402/1402 [==============================] - 0s 163us/step
[0.13994533056459693, 0.9600570613409415, 0.9593437945791726]
In [121]:
model_bidi_lstm_df
Out[121]:
blues classical country disco hiphop jazz macro avg metal micro avg pop reggae rock weighted avg
f1-score 0.979167 0.981949 0.954357 0.915129 0.957854 0.954064 0.959391 0.977918 0.960057 0.996491 0.963934 0.913043 0.960337
precision 1.000000 1.000000 0.950413 0.867133 0.968992 0.950704 0.959936 0.987261 0.960057 1.000000 0.948387 0.926471 0.961430
recall 0.959184 0.964539 0.958333 0.968750 0.946970 0.957447 0.959698 0.968750 0.960057 0.993007 0.980000 0.900000 0.960057
support 147.000000 141.000000 120.000000 128.000000 132.000000 141.000000 1402.000000 160.000000 1402.000000 143.000000 150.000000 140.000000 1402.000000
In [122]:
saveModel(model_bidi_lstm,'Bidirectional_LSTM_basic')
Saved model Bidirectional_LSTM_basic to disk
In [123]:
model_bidi_lstm_df.to_csv("model_bidi_lstm_df.csv")

New LSTM

In [ ]:
model_name = 'New_LSTM'
tb = TensorBoard(log_dir='./log/{}'.format(model_name), histogram_freq=0,  
          write_graph=True, write_images=True)
callbacks = [EarlyStopping(monitor='val_loss', min_delta= 0.005, patience=20),
            EarlyStopping(monitor='val_acc', min_delta= 0.005, patience=20),
            EarlyStopping(monitor='val_recall', min_delta= 0.005, patience=20),tb]

model_lstm2 = Sequential()
model_lstm2.add(LSTM(units=128, dropout=0.05, recurrent_dropout=0.35, return_sequences=True, input_shape=(x_rnn_train.shape[1], x_rnn_train.shape[2])))
model_lstm2.add(LSTM(units=32, dropout=0.05, recurrent_dropout=0.35, return_sequences=False))
model_lstm2.add(Dense(units=num_labels, activation='softmax'))
model_lstm2.add(Dropout(0.5))
model_lstm2.compile(loss='categorical_crossentropy', optimizer=Adam(), metrics=['accuracy',recall])
In [161]:
model_bidi_lstm2.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
bidirectional_3 (Bidirection (None, 1, 256)            177152    
_________________________________________________________________
bidirectional_4 (Bidirection (None, 200)               285600    
_________________________________________________________________
dense_15 (Dense)             (None, 14)                2814      
_________________________________________________________________
activation_14 (Activation)   (None, 14)                0         
_________________________________________________________________
dropout_9 (Dropout)          (None, 14)                0         
_________________________________________________________________
dense_16 (Dense)             (None, 10)                150       
_________________________________________________________________
activation_15 (Activation)   (None, 10)                0         
=================================================================
Total params: 465,716
Trainable params: 465,716
Non-trainable params: 0
_________________________________________________________________
In [126]:
%time model_lstm2.fit(x_rnn_train, y_train, callbacks = callbacks, batch_size=50, epochs=400, validation_split=0.25)
Train on 4206 samples, validate on 1402 samples
Epoch 1/400
4206/4206 [==============================] - 1s 311us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 2/400
4206/4206 [==============================] - 1s 294us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 3/400
4206/4206 [==============================] - 1s 294us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 4/400
4206/4206 [==============================] - 1s 302us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 5/400
4206/4206 [==============================] - 1s 297us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 6/400
4206/4206 [==============================] - 1s 303us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 7/400
4206/4206 [==============================] - 1s 310us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 8/400
4206/4206 [==============================] - 1s 298us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 9/400
4206/4206 [==============================] - 1s 294us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 10/400
4206/4206 [==============================] - 1s 312us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 11/400
4206/4206 [==============================] - 1s 303us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 12/400
4206/4206 [==============================] - 1s 279us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 13/400
4206/4206 [==============================] - 1s 271us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 14/400
4206/4206 [==============================] - 1s 302us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 15/400
4206/4206 [==============================] - 1s 292us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 16/400
4206/4206 [==============================] - 1s 263us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 17/400
4206/4206 [==============================] - 1s 298us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 18/400
4206/4206 [==============================] - 1s 285us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 19/400
4206/4206 [==============================] - 1s 289us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 20/400
4206/4206 [==============================] - 1s 289us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
Epoch 21/400
4206/4206 [==============================] - 1s 283us/step - loss: 1.1921e-07 - acc: 0.1034 - recall: 10.0000 - val_loss: 1.1921e-07 - val_acc: 0.0849 - val_recall: 10.0000
CPU times: user 44.5 s, sys: 4.27 s, total: 48.7 s
Wall time: 28 s
Out[126]:
<keras.callbacks.History at 0x48201b90>
In [127]:
train_preds_basic_lstm2_proba,test_preds_basic_lstm2_proba = get_predicted_class_prob(model_lstm2,x_rnn_train, x_rnn_test)
train_preds_basic_lstm2,test_preds_basic_lstm2 = get_predicted_classes(model_lstm2,x_rnn_train, x_rnn_test)
In [128]:
model_lstm2_df = print_metrics(model_lstm2,x_rnn_train,y_train,x_rnn_test,y_test )
['loss', 'acc', 'recall']
5608/5608 [==============================] - 1s 131us/step
[1.1920930376163597e-07, 0.09878744650499287, 10.0]
1402/1402 [==============================] - 0s 128us/step
[1.1920930376163597e-07, 0.10485021408631356, 10.0]
/home/bglrgpuaccess/.local/lib/python2.7/site-packages/sklearn/metrics/classification.py:1143: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
  'precision', 'predicted', average, warn_for)
In [129]:
model_lstm2_df
Out[129]:
blues classical country disco hiphop jazz macro avg metal micro avg pop reggae rock weighted avg
f1-score 0.18980 0.0 0.0 0.0 0.0 0.0 0.018980 0.0 0.10485 0.0 0.0 0.0 0.019901
precision 0.10485 0.0 0.0 0.0 0.0 0.0 0.010485 0.0 0.10485 0.0 0.0 0.0 0.010994
recall 1.00000 0.0 0.0 0.0 0.0 0.0 0.100000 0.0 0.10485 0.0 0.0 0.0 0.104850
support 147.00000 141.0 120.0 128.0 132.0 141.0 1402.000000 160.0 1402.00000 143.0 150.0 140.0 1402.000000
In [130]:
saveModel(model_lstm2,"model_lstm2")
Saved model model_lstm2 to disk
In [131]:
model_lstm2_df.to_csv("model_lstm2_df.csv")

Bi Di LSTM - NEW

In [132]:
model_name = 'New_Bidirectional_LSTM-1'
tb = TensorBoard(log_dir='./log/{}'.format(model_name), histogram_freq=0,  
          write_graph=True, write_images=True)
callbacks = [EarlyStopping(monitor='val_loss', min_delta= 0.005, patience=20),
            EarlyStopping(monitor='val_acc', min_delta= 0.005, patience=20),
            EarlyStopping(monitor='val_recall', min_delta= 0.005, patience=20),tb]



model_bidi_lstm2 = Sequential()
model_bidi_lstm2.add(Bidirectional(LSTM(128 ,return_sequences=True),
                        input_shape=(x_rnn_train.shape[1], x_rnn_train.shape[2])))

model_bidi_lstm2.add(Bidirectional(LSTM(100)))


model_bidi_lstm2.add(Dense(14))
model_bidi_lstm2.add(Activation('relu'))
model_bidi_lstm2.add(Dropout(0.5))

model_bidi_lstm2.add(Dense(num_labels))
model_bidi_lstm2.add(Activation('softmax'))

model_bidi_lstm2.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['acc',recall])
In [162]:
model_bidi_lstm2.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
bidirectional_3 (Bidirection (None, 1, 256)            177152    
_________________________________________________________________
bidirectional_4 (Bidirection (None, 200)               285600    
_________________________________________________________________
dense_15 (Dense)             (None, 14)                2814      
_________________________________________________________________
activation_14 (Activation)   (None, 14)                0         
_________________________________________________________________
dropout_9 (Dropout)          (None, 14)                0         
_________________________________________________________________
dense_16 (Dense)             (None, 10)                150       
_________________________________________________________________
activation_15 (Activation)   (None, 10)                0         
=================================================================
Total params: 465,716
Trainable params: 465,716
Non-trainable params: 0
_________________________________________________________________
In [133]:
%time model_bidi_lstm2.fit(x_rnn_train, y_train, callbacks = callbacks,epochs=15000, validation_split=0.25,batch_size=30)
Train on 4206 samples, validate on 1402 samples
Epoch 1/15000
4206/4206 [==============================] - 6s 1ms/step - loss: 2.0140 - acc: 0.2646 - recall: 0.0561 - val_loss: 1.6303 - val_acc: 0.4337 - val_recall: 0.1569
Epoch 2/15000
4206/4206 [==============================] - 3s 714us/step - loss: 1.6145 - acc: 0.4151 - recall: 0.1812 - val_loss: 1.3285 - val_acc: 0.5813 - val_recall: 0.2618
Epoch 3/15000
4206/4206 [==============================] - 3s 754us/step - loss: 1.3900 - acc: 0.4905 - recall: 0.2789 - val_loss: 1.1416 - val_acc: 0.6619 - val_recall: 0.3559
Epoch 4/15000
4206/4206 [==============================] - 3s 729us/step - loss: 1.2488 - acc: 0.5523 - recall: 0.3424 - val_loss: 0.9856 - val_acc: 0.6904 - val_recall: 0.4544
Epoch 5/15000
4206/4206 [==============================] - 3s 740us/step - loss: 1.1531 - acc: 0.5725 - recall: 0.3947 - val_loss: 0.8768 - val_acc: 0.7318 - val_recall: 0.5421
Epoch 6/15000
4206/4206 [==============================] - 3s 744us/step - loss: 1.0696 - acc: 0.6006 - recall: 0.4356 - val_loss: 0.8115 - val_acc: 0.7404 - val_recall: 0.5899
Epoch 7/15000
4206/4206 [==============================] - 3s 724us/step - loss: 0.9771 - acc: 0.6317 - recall: 0.4772 - val_loss: 0.7217 - val_acc: 0.7753 - val_recall: 0.6448
Epoch 8/15000
4206/4206 [==============================] - 3s 696us/step - loss: 0.9036 - acc: 0.6645 - recall: 0.5297 - val_loss: 0.6690 - val_acc: 0.7967 - val_recall: 0.6826
Epoch 9/15000
4206/4206 [==============================] - 3s 668us/step - loss: 0.8548 - acc: 0.6890 - recall: 0.5616 - val_loss: 0.6191 - val_acc: 0.8017 - val_recall: 0.7054
Epoch 10/15000
4206/4206 [==============================] - 3s 718us/step - loss: 0.7900 - acc: 0.7011 - recall: 0.5827 - val_loss: 0.5808 - val_acc: 0.8153 - val_recall: 0.7447
Epoch 11/15000
4206/4206 [==============================] - 3s 757us/step - loss: 0.7657 - acc: 0.7137 - recall: 0.6032 - val_loss: 0.5145 - val_acc: 0.8367 - val_recall: 0.7632
Epoch 12/15000
4206/4206 [==============================] - 3s 727us/step - loss: 0.6928 - acc: 0.7401 - recall: 0.6476 - val_loss: 0.4457 - val_acc: 0.8545 - val_recall: 0.8039
Epoch 13/15000
4206/4206 [==============================] - 3s 723us/step - loss: 0.6920 - acc: 0.7278 - recall: 0.6377 - val_loss: 0.4338 - val_acc: 0.8623 - val_recall: 0.8096
Epoch 14/15000
4206/4206 [==============================] - 3s 729us/step - loss: 0.6262 - acc: 0.7606 - recall: 0.6750 - val_loss: 0.4167 - val_acc: 0.8688 - val_recall: 0.8317
Epoch 15/15000
4206/4206 [==============================] - 3s 680us/step - loss: 0.6140 - acc: 0.7534 - recall: 0.6752 - val_loss: 0.3727 - val_acc: 0.8852 - val_recall: 0.8502
Epoch 16/15000
4206/4206 [==============================] - 3s 720us/step - loss: 0.5676 - acc: 0.7798 - recall: 0.7021 - val_loss: 0.3552 - val_acc: 0.8916 - val_recall: 0.8688
Epoch 17/15000
4206/4206 [==============================] - 3s 708us/step - loss: 0.5639 - acc: 0.7734 - recall: 0.7021 - val_loss: 0.3126 - val_acc: 0.9009 - val_recall: 0.8787
Epoch 18/15000
4206/4206 [==============================] - 3s 720us/step - loss: 0.5322 - acc: 0.7929 - recall: 0.7263 - val_loss: 0.2962 - val_acc: 0.9051 - val_recall: 0.8873
Epoch 19/15000
4206/4206 [==============================] - 3s 729us/step - loss: 0.4941 - acc: 0.7920 - recall: 0.7309 - val_loss: 0.2948 - val_acc: 0.9108 - val_recall: 0.8937
Epoch 20/15000
4206/4206 [==============================] - 3s 600us/step - loss: 0.4776 - acc: 0.8024 - recall: 0.7485 - val_loss: 0.3064 - val_acc: 0.9037 - val_recall: 0.8830
Epoch 21/15000
4206/4206 [==============================] - 3s 642us/step - loss: 0.4747 - acc: 0.7979 - recall: 0.7494 - val_loss: 0.2481 - val_acc: 0.9258 - val_recall: 0.9094
CPU times: user 1min 55s, sys: 11 s, total: 2min 6s
Wall time: 1min 12s
Out[133]:
<keras.callbacks.History at 0x1ef2d0d0>
In [134]:
model_bidi_lstm2.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
bidirectional_3 (Bidirection (None, 1, 256)            177152    
_________________________________________________________________
bidirectional_4 (Bidirection (None, 200)               285600    
_________________________________________________________________
dense_15 (Dense)             (None, 14)                2814      
_________________________________________________________________
activation_14 (Activation)   (None, 14)                0         
_________________________________________________________________
dropout_9 (Dropout)          (None, 14)                0         
_________________________________________________________________
dense_16 (Dense)             (None, 10)                150       
_________________________________________________________________
activation_15 (Activation)   (None, 10)                0         
=================================================================
Total params: 465,716
Trainable params: 465,716
Non-trainable params: 0
_________________________________________________________________
In [135]:
train_preds_bidi_lstm2_proba,test_preds_bidi_lstm2_proba = get_predicted_class_prob(model_bidi_lstm2,x_rnn_train, x_rnn_test)
train_preds_bidi_lstm2,test_preds_bidi_lstm2 = get_predicted_classes(model_bidi_lstm2,x_rnn_train, x_rnn_test)
In [136]:
model_bidi_lstm2_df = print_metrics(model_bidi_lstm2,x_rnn_train,y_train,x_rnn_test,y_test )
['loss', 'acc', 'recall']
5608/5608 [==============================] - 1s 162us/step
[0.12249356543329065, 0.9687945791726106, 0.9586305278174037]
1402/1402 [==============================] - 0s 147us/step
[0.22103330175181088, 0.9343794574070929, 0.919400855409946]
In [137]:
model_bidi_lstm2_df
Out[137]:
blues classical country disco hiphop jazz macro avg metal micro avg pop reggae rock weighted avg
f1-score 0.960265 0.972028 0.853448 0.923077 0.923077 0.942446 0.932222 0.964856 0.934379 0.982456 0.948454 0.852113 0.934341
precision 0.935484 0.958621 0.883929 0.909091 0.893617 0.956204 0.932879 0.986928 0.934379 0.985915 0.978723 0.840278 0.935264
recall 0.986395 0.985816 0.825000 0.937500 0.954545 0.929078 0.932539 0.943750 0.934379 0.979021 0.920000 0.864286 0.934379
support 147.000000 141.000000 120.000000 128.000000 132.000000 141.000000 1402.000000 160.000000 1402.000000 143.000000 150.000000 140.000000 1402.000000
In [138]:
saveModel(model_bidi_lstm2,"model_bidi_lstm2")
Saved model model_bidi_lstm2 to disk
In [139]:
model_bidi_lstm2_df.to_csv("model_bidi_lstm2_df.csv")

Ensemble of Deep Learning models

In [140]:
(train_preds_basic_nn_proba.shape) == (train_preds_basic_rnn_proba.shape) == (train_preds_basic_lstm_proba.shape) == (train_preds_basic_bidi_lstm_proba.shape) == (train_preds_basic_lstm2_proba.shape) == (train_preds_bidi_lstm2_proba.shape)
Out[140]:
True
In [141]:
(test_preds_basic_nn_proba.shape) == (test_preds_basic_rnn_proba.shape) == (test_preds_basic_lstm_proba.shape) == (test_preds_basic_bidi_lstm_proba.shape) == (test_preds_basic_lstm2_proba.shape) == (test_preds_bidi_lstm2_proba.shape)
Out[141]:
True
In [142]:
train_proba_lists =[train_preds_basic_nn_proba, train_preds_basic_rnn_proba,train_preds_basic_lstm_proba,train_preds_basic_bidi_lstm_proba,train_preds_basic_lstm2_proba,train_preds_bidi_lstm2_proba]
In [143]:
test_proba_lists =[test_preds_basic_nn_proba, test_preds_basic_rnn_proba,test_preds_basic_lstm_proba,test_preds_basic_bidi_lstm_proba,test_preds_basic_lstm2_proba,test_preds_bidi_lstm2_proba]
In [144]:
train_ensem_proba = pd.concat([i for i in train_proba_lists],axis = 1)
In [145]:
test_ensem_proba = pd.concat([i for i in test_proba_lists],axis = 1)
In [147]:
model_name = 'Ensemble_NN'
tb = TensorBoard(log_dir='./log/{}'.format(model_name), histogram_freq=0,  
          write_graph=True, write_images=True)
callbacks = [EarlyStopping(monitor='val_loss', min_delta= 0.005, patience=20),
            EarlyStopping(monitor='val_acc', min_delta= 0.005, patience=20),
            EarlyStopping(monitor='val_recall', min_delta= 0.005, patience=20),tb]

model_ensemble_nn = Sequential()

model_ensemble_nn.add(Dense(128, input_shape=(train_ensem_proba.shape[1],)))
model_ensemble_nn.add(Activation('relu'))
model_ensemble_nn.add(Dropout(0.5))

model_ensemble_nn.add(Dense(65))
model_ensemble_nn.add(Activation('relu'))
model_ensemble_nn.add(Dropout(0.5))

model_ensemble_nn.add(Dense(num_labels))
model_ensemble_nn.add(Activation('softmax'))

model_ensemble_nn.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['acc',recall])
In [148]:
model_ensemble_nn.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_17 (Dense)             (None, 128)               7808      
_________________________________________________________________
activation_16 (Activation)   (None, 128)               0         
_________________________________________________________________
dropout_10 (Dropout)         (None, 128)               0         
_________________________________________________________________
dense_18 (Dense)             (None, 65)                8385      
_________________________________________________________________
activation_17 (Activation)   (None, 65)                0         
_________________________________________________________________
dropout_11 (Dropout)         (None, 65)                0         
_________________________________________________________________
dense_19 (Dense)             (None, 10)                660       
_________________________________________________________________
activation_18 (Activation)   (None, 10)                0         
=================================================================
Total params: 16,853
Trainable params: 16,853
Non-trainable params: 0
_________________________________________________________________
In [152]:
%time model_ensemble_nn.fit(train_ensem_proba,y_train,callbacks = callbacks,epochs=15000, validation_split=0.25)
Train on 4206 samples, validate on 1402 samples
Epoch 1/15000
4206/4206 [==============================] - 1s 149us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3059 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 2/15000
4206/4206 [==============================] - 1s 149us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3058 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 3/15000
4206/4206 [==============================] - 1s 154us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3059 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 4/15000
4206/4206 [==============================] - 1s 153us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3060 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 5/15000
4206/4206 [==============================] - 1s 159us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3059 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 6/15000
4206/4206 [==============================] - 1s 157us/step - loss: 2.3022 - acc: 0.1003 - recall: 0.0000e+00 - val_loss: 2.3058 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 7/15000
4206/4206 [==============================] - 1s 145us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3059 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 8/15000
4206/4206 [==============================] - 1s 143us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3058 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 9/15000
4206/4206 [==============================] - 1s 154us/step - loss: 2.3022 - acc: 0.1029 - recall: 0.0000e+00 - val_loss: 2.3059 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 10/15000
4206/4206 [==============================] - 1s 144us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3061 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 11/15000
4206/4206 [==============================] - 1s 149us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3058 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 12/15000
4206/4206 [==============================] - 1s 133us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3059 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 13/15000
4206/4206 [==============================] - 1s 126us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3059 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 14/15000
4206/4206 [==============================] - 1s 143us/step - loss: 2.3021 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3059 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 15/15000
4206/4206 [==============================] - 1s 149us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3057 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 16/15000
4206/4206 [==============================] - 1s 126us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3058 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 17/15000
4206/4206 [==============================] - 1s 119us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3060 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 18/15000
4206/4206 [==============================] - 1s 123us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3059 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 19/15000
4206/4206 [==============================] - 1s 119us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3059 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 20/15000
4206/4206 [==============================] - 0s 115us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3059 - val_acc: 0.0856 - val_recall: 0.0000e+00
Epoch 21/15000
4206/4206 [==============================] - 0s 107us/step - loss: 2.3022 - acc: 0.1049 - recall: 0.0000e+00 - val_loss: 2.3059 - val_acc: 0.0856 - val_recall: 0.0000e+00
CPU times: user 24 s, sys: 2.56 s, total: 26.6 s
Wall time: 15.5 s
Out[152]:
<keras.callbacks.History at 0x4e3699d0>
In [150]:
model_ensemble_nn_df = print_metrics(model_ensemble_nn,train_ensem_proba,y_train,test_ensem_proba,y_test )
['loss', 'acc', 'recall']
5608/5608 [==============================] - 0s 49us/step
[2.3029128471216698, 0.10003566333808844, 0.0]
1402/1402 [==============================] - 0s 49us/step
[2.3039552777708003, 0.09985734670078873, 0.0]
In [151]:
model_ensemble_nn_df
Out[151]:
blues classical country disco hiphop jazz macro avg metal micro avg pop reggae rock weighted avg
f1-score 0.0 0.0 0.0 0.0 0.0 0.0 0.018158 0.0 0.099857 0.0 0.0 0.181582 0.018132
precision 0.0 0.0 0.0 0.0 0.0 0.0 0.009986 0.0 0.099857 0.0 0.0 0.099857 0.009971
recall 0.0 0.0 0.0 0.0 0.0 0.0 0.100000 0.0 0.099857 0.0 0.0 1.000000 0.099857
support 147.0 141.0 120.0 128.0 132.0 141.0 1402.000000 160.0 1402.000000 143.0 150.0 140.000000 1402.000000
In [153]:
saveModel(model_ensemble_nn,'model_ensemble_nn')
Saved model model_ensemble_nn to disk
In [154]:
model_ensemble_nn_df.to_csv("model_ensemble_nn_df.csv")

create a df for plotting

In [155]:
model_perf_train_df_names = ['model_basic_nn_df','model_basic_rnn_df','model_lstm_df',
'model_bidi_lstm_df', 'model_lstm2_df', 'model_bidi_lstm2_df']
In [156]:
model_perf_train_df = [model_basic_nn_df,model_basic_rnn_df,model_lstm_df,
model_bidi_lstm_df, model_lstm2_df, model_bidi_lstm2_df]
In [157]:
model_basic_nn_df
Out[157]:
blues classical country disco hiphop jazz macro avg metal micro avg pop reggae rock weighted avg
f1-score 0.996610 0.992908 0.979253 0.972549 0.959707 0.985816 0.976462 0.961039 0.976462 0.989399 0.986577 0.940767 0.976551
precision 0.993243 0.992908 0.975207 0.976378 0.929078 0.985816 0.976424 1.000000 0.976462 1.000000 0.993243 0.918367 0.977321
recall 1.000000 0.992908 0.983333 0.968750 0.992424 0.985816 0.977154 0.925000 0.976462 0.979021 0.980000 0.964286 0.976462
support 147.000000 141.000000 120.000000 128.000000 132.000000 141.000000 1402.000000 160.000000 1402.000000 143.000000 150.000000 140.000000 1402.000000
In [158]:
all_model_stats = pd.DataFrame([],columns=lb.classes_)
all_model_stats['model'] = ''
In [159]:
for i in zip(model_perf_train_df_names,model_perf_train_df):
    i[1]['model'] = i[0]
    all_model_stats = pd.concat([all_model_stats,i[1]], axis = 0)
In [160]:
all_model_stats
Out[160]:
blues classical country disco hiphop jazz macro avg metal micro avg model pop reggae rock weighted avg
f1-score 0.996610 0.992908 0.979253 0.972549 0.959707 0.985816 0.976462 0.961039 0.976462 model_basic_nn_df 0.989399 0.986577 0.940767 0.976551
precision 0.993243 0.992908 0.975207 0.976378 0.929078 0.985816 0.976424 1.000000 0.976462 model_basic_nn_df 1.000000 0.993243 0.918367 0.977321
recall 1.000000 0.992908 0.983333 0.968750 0.992424 0.985816 0.977154 0.925000 0.976462 model_basic_nn_df 0.979021 0.980000 0.964286 0.976462
support 147.000000 141.000000 120.000000 128.000000 132.000000 141.000000 1402.000000 160.000000 1402.000000 model_basic_nn_df 143.000000 150.000000 140.000000 1402.000000
f1-score 0.986301 0.975439 0.953975 0.957198 0.947368 0.975610 0.965139 0.984326 0.965763 model_basic_rnn_df 0.975265 0.945205 0.950704 0.965742
precision 0.993103 0.965278 0.957983 0.953488 0.940299 0.958904 0.965152 0.987421 0.965763 model_basic_rnn_df 0.985714 0.971831 0.937500 0.966041
recall 0.979592 0.985816 0.950000 0.960938 0.954545 0.992908 0.965437 0.981250 0.965763 model_basic_rnn_df 0.965035 0.920000 0.964286 0.965763
support 147.000000 141.000000 120.000000 128.000000 132.000000 141.000000 1402.000000 160.000000 1402.000000 model_basic_rnn_df 143.000000 150.000000 140.000000 1402.000000
f1-score 0.943144 0.975779 0.900000 0.900000 0.906475 0.936170 0.929440 0.965300 0.930813 model_lstm_df 0.957143 0.903915 0.906475 0.930738
precision 0.927632 0.952703 0.900000 0.886364 0.863014 0.936170 0.930102 0.974522 0.930813 model_lstm_df 0.978102 0.969466 0.913043 0.932313
recall 0.959184 1.000000 0.900000 0.914062 0.954545 0.936170 0.930394 0.956250 0.930813 model_lstm_df 0.937063 0.846667 0.900000 0.930813
support 147.000000 141.000000 120.000000 128.000000 132.000000 141.000000 1402.000000 160.000000 1402.000000 model_lstm_df 143.000000 150.000000 140.000000 1402.000000
f1-score 0.979167 0.981949 0.954357 0.915129 0.957854 0.954064 0.959391 0.977918 0.960057 model_bidi_lstm_df 0.996491 0.963934 0.913043 0.960337
precision 1.000000 1.000000 0.950413 0.867133 0.968992 0.950704 0.959936 0.987261 0.960057 model_bidi_lstm_df 1.000000 0.948387 0.926471 0.961430
recall 0.959184 0.964539 0.958333 0.968750 0.946970 0.957447 0.959698 0.968750 0.960057 model_bidi_lstm_df 0.993007 0.980000 0.900000 0.960057
support 147.000000 141.000000 120.000000 128.000000 132.000000 141.000000 1402.000000 160.000000 1402.000000 model_bidi_lstm_df 143.000000 150.000000 140.000000 1402.000000
f1-score 0.189800 0.000000 0.000000 0.000000 0.000000 0.000000 0.018980 0.000000 0.104850 model_lstm2_df 0.000000 0.000000 0.000000 0.019901
precision 0.104850 0.000000 0.000000 0.000000 0.000000 0.000000 0.010485 0.000000 0.104850 model_lstm2_df 0.000000 0.000000 0.000000 0.010994
recall 1.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.100000 0.000000 0.104850 model_lstm2_df 0.000000 0.000000 0.000000 0.104850
support 147.000000 141.000000 120.000000 128.000000 132.000000 141.000000 1402.000000 160.000000 1402.000000 model_lstm2_df 143.000000 150.000000 140.000000 1402.000000
f1-score 0.960265 0.972028 0.853448 0.923077 0.923077 0.942446 0.932222 0.964856 0.934379 model_bidi_lstm2_df 0.982456 0.948454 0.852113 0.934341
precision 0.935484 0.958621 0.883929 0.909091 0.893617 0.956204 0.932879 0.986928 0.934379 model_bidi_lstm2_df 0.985915 0.978723 0.840278 0.935264
recall 0.986395 0.985816 0.825000 0.937500 0.954545 0.929078 0.932539 0.943750 0.934379 model_bidi_lstm2_df 0.979021 0.920000 0.864286 0.934379
support 147.000000 141.000000 120.000000 128.000000 132.000000 141.000000 1402.000000 160.000000 1402.000000 model_bidi_lstm2_df 143.000000 150.000000 140.000000 1402.000000
In [187]:
all_models_recall = pd.read_csv("all_models_recall.csv")
In [188]:
all_models_recall
Out[188]:
Model Name Number of layers Number of parameters Time to train blues classical country disco hiphop jazz metal pop reggae rock
0 Basic Neural Network 5 45706 26.8 1.000000 0.992908 0.983333 0.968750 0.992424 0.985816 0.92500 0.979021 0.980000 0.964286
1 Basic RNN 8 211210 16.5 0.979592 0.985816 0.950000 0.960938 0.954545 0.992908 0.98125 0.965035 0.920000 0.964286
2 Basic LSTM 12 508682 31.3 0.959184 1.000000 0.900000 0.914062 0.954545 0.936170 0.95625 0.937063 0.846667 0.900000
3 Bidirectional LSTM 9 1226666 76.0 0.959184 0.964539 0.958333 0.968750 0.946970 0.957447 0.96875 0.993007 0.980000 0.900000
4 LSTM 2 6 465716 28.0 1.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.00000 0.000000 0.000000 0.000000
5 Bidirectional LSTM 2 6 465716 72.0 0.986395 0.985816 0.825000 0.937500 0.954545 0.929078 0.94375 0.979021 0.920000 0.864286
6 Ensemble 7 16853 15.5 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.00000 0.000000 0.000000 1.000000
In [195]:
fig, ax = plt.subplots( nrows=1, ncols=1 )
plt.bar(all_models_recall['Model Name'], all_models_recall['Number of parameters'])
plt.xticks(rotation=70)
plt.title("Number of parameters")
plt.show()
-- End --
© 2019 Copyright Built and deployed by Chaithanya, Mahidhar and Seema